In [1]:
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
In [2]:
!pip install tensorflow-io
Collecting tensorflow-io
  Downloading https://files.pythonhosted.org/packages/07/3c/b45c30448cd6a04f25b088da024229149323fa44bc6322a7372bb556eada/tensorflow_io-0.17.0-cp36-cp36m-manylinux2010_x86_64.whl (25.3MB)
     |████████████████████████████████| 25.3MB 136kB/s 
Requirement already satisfied: tensorflow<2.5.0,>=2.4.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-io) (2.4.0)
Requirement already satisfied: keras-preprocessing~=1.1.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.1.2)
Requirement already satisfied: termcolor~=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.1.0)
Requirement already satisfied: gast==0.3.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.3.3)
Requirement already satisfied: tensorflow-estimator<2.5.0,>=2.4.0rc0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.4.0)
Requirement already satisfied: wrapt~=1.12.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.12.1)
Requirement already satisfied: opt-einsum~=3.3.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.3.0)
Requirement already satisfied: absl-py~=0.10 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.10.0)
Requirement already satisfied: protobuf>=3.9.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.12.4)
Requirement already satisfied: flatbuffers~=1.12.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.12)
Requirement already satisfied: google-pasta~=0.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.2.0)
Requirement already satisfied: wheel~=0.35 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.36.2)
Requirement already satisfied: six~=1.15.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.15.0)
Requirement already satisfied: tensorboard~=2.4 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.4.0)
Requirement already satisfied: h5py~=2.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.10.0)
Requirement already satisfied: grpcio~=1.32.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.32.0)
Requirement already satisfied: typing-extensions~=3.7.4 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.7.4.3)
Requirement already satisfied: numpy~=1.19.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.19.5)
Requirement already satisfied: astunparse~=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.6.3)
Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.9.2->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (51.1.1)
Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.3.3)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.7.0)
Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.17.2)
Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.0.1)
Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.23.0)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.4.2)
Requirement already satisfied: importlib-metadata; python_version < "3.8" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.3.0)
Requirement already satisfied: rsa<5,>=3.1.4; python_version >= "3" in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (4.6)
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.2.8)
Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (4.2.0)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2020.12.5)
Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.24.3)
Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.0.4)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.10)
Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.3.0)
Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < "3.8"->markdown>=2.6.8->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.4.0)
Requirement already satisfied: pyasn1>=0.1.3 in /usr/local/lib/python3.6/dist-packages (from rsa<5,>=3.1.4; python_version >= "3"->google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.4.8)
Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.1.0)
Installing collected packages: tensorflow-io
Successfully installed tensorflow-io-0.17.0
In [3]:
!pip install pydicom
Collecting pydicom
  Downloading https://files.pythonhosted.org/packages/f4/15/df16546bc59bfca390cf072d473fb2c8acd4231636f64356593a63137e55/pydicom-2.1.2-py3-none-any.whl (1.9MB)
     |████████████████████████████████| 1.9MB 15.5MB/s 
Installing collected packages: pydicom
Successfully installed pydicom-2.1.2
In [4]:
import tensorflow as tf

AUTO = tf.data.experimental.AUTOTUNE
# Detect hardware, return appropriate distribution strategy
try:
    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()  # TPU detection. No parameters necessary if TPU_NAME environment variable is set. On Kaggle this is always the case.
    print('Running on TPU ', tpu.master())
except ValueError:
    tpu = None

if tpu:
    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    strategy = tf.distribute.experimental.TPUStrategy(tpu)
else:
    strategy = tf.distribute.get_strategy() # default distribution strategy in Tensorflow. Works on CPU and single GPU.

print("REPLICAS: ", strategy.num_replicas_in_sync)
REPLICAS:  1
In [5]:
import tensorflow_io as tfio
import pathlib
import os
from os import listdir
from os.path import join
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import PIL
import scipy as sp
import PIL.Image
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow_hub as hub
import functools
from glob import glob
import glob
from sklearn.model_selection import train_test_split
import pydicom
from tqdm import tqdm
import warnings
warnings.filterwarnings('ignore')
# For more information about autotune:
# https://www.tensorflow.org/guide/data_performance#prefetching
AUTOTUNE = tf.data.experimental.AUTOTUNE
print(f'Tensorflow ver. {tf.__version__}')

np.set_printoptions(precision=4)
Tensorflow ver. 2.4.0
In [6]:
from glob import glob
import glob

#reading all dcm files into train and test
train = sorted(glob.glob("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/*/*/*.dcm"))
test = sorted(glob.glob("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-test/*/*/*.dcm"))
In [7]:
# Print out the first 5 file names to verify we're in the right folder.
print ("There are total of %d DICOM images in train:" % len(train))
print('*' * 50)
print ("There are total of %d DICOM images in test:" % len(test))
There are total of 10722 DICOM images in train:
**************************************************
There are total of 1377 DICOM images in test:
In [8]:
# load masks for training images
dataset = pd.read_csv('/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/train-rle.csv', delimiter=",")
dataset.head()
Out[8]:
ImageId EncodedPixels
0 1.2.276.0.7230010.3.1.4.8323329.5597.151787518... -1
1 1.2.276.0.7230010.3.1.4.8323329.12515.15178752... -1
2 1.2.276.0.7230010.3.1.4.8323329.4904.151787518... 175349 7 1013 12 1009 17 1005 19 1003 20 1002...
3 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 407576 2 1021 7 1015 10 1013 12 1011 14 1008 ...
4 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 252069 1 1021 3 1020 4 1018 5 1018 6 1016 7 1...
In [9]:
# add column if the file is duplicate or not
dataset['isDuplicate'] = dataset['ImageId'].duplicated()
dataset.head()
Out[9]:
ImageId EncodedPixels isDuplicate
0 1.2.276.0.7230010.3.1.4.8323329.5597.151787518... -1 False
1 1.2.276.0.7230010.3.1.4.8323329.12515.15178752... -1 False
2 1.2.276.0.7230010.3.1.4.8323329.4904.151787518... 175349 7 1013 12 1009 17 1005 19 1003 20 1002... False
3 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 407576 2 1021 7 1015 10 1013 12 1011 14 1008 ... False
4 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 252069 1 1021 3 1020 4 1018 5 1018 6 1016 7 1... True
In [10]:
# check where the files are duplicate
dupImages = dataset.index[dataset['isDuplicate']==True]
print(f"We have total {len(dupImages)} duplicate image ids")
We have total 907 duplicate image ids
In [11]:
print(f"With duplicates we have total {len(dataset)} files.")
dataset = dataset.drop(list(dupImages))
print(f"Without duplicates we have total {len(dataset)} files.")
With duplicates we have total 11582 files.
Without duplicates we have total 10675 files.
In [12]:
dataset = dataset.drop('isDuplicate', axis=1)
In [13]:
#dataframe to ease the access
patients = []
missing = 0
remove=[]
pd.reset_option('max_colwidth')

for t in train:
  data = pydicom.dcmread(t)
  patient = {}
  patient["UID"] = data.SOPInstanceUID
  try:
    encoded_pixels = dataset[dataset["ImageId"] == patient["UID"]].values[0][1]
    patient["EncodedPixels"] = encoded_pixels
  except:
    missing = missing + 1
    remove.append("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/" + data.StudyInstanceUID + "/" + data.SeriesInstanceUID + "/" + data.SOPInstanceUID + ".dcm")
  patient["Age"] = data.PatientAge
  patient["Sex"] = data.PatientSex
  patient["Modality"] = data.Modality
  patient["BodyPart"] = data.BodyPartExamined
  patient["ViewPosition"] = data.ViewPosition
  patient["path"] = "/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/" + data.StudyInstanceUID + "/" + data.SeriesInstanceUID + "/" + data.SOPInstanceUID + ".dcm"
  patients.append(patient)

patients_train = pd.DataFrame(patients,columns=["UID", "EncodedPixels","path"])
patients_train=patients_train.loc[~patients_train['path'].isin(remove)] #remove rows which do not have images
patients_train = patients_train[patients_train["EncodedPixels"] != ' -1']
patients_train.drop('UID',axis=1,inplace=True)
patients_train.head()
Out[13]:
EncodedPixels path
6 209126 1 1019 6 1015 10 1012 13 1010 14 1008 ... /content/drive/MyDrive/siim-acr-pneumothorax/p...
13 891504 5 1018 8 1015 10 1013 12 1011 14 1009 ... /content/drive/MyDrive/siim-acr-pneumothorax/p...
14 261328 6 1015 11 1011 15 1007 18 1004 21 1002... /content/drive/MyDrive/siim-acr-pneumothorax/p...
18 592184 33 976 58 956 73 941 88 926 102 917 10... /content/drive/MyDrive/siim-acr-pneumothorax/p...
28 530522 1 1022 3 1019 6 1017 7 1016 9 1014 10 ... /content/drive/MyDrive/siim-acr-pneumothorax/p...
In [14]:
def rle2mask(rle, width, height):
    """
    RLE to mask conversion provided by competetion organizers with the dataset.
    """
    mask= np.zeros(width* height)
    array = np.asarray([int(x) for x in rle.split()])
    starts = array[0::2]
    lengths = array[1::2]

    current_position = 0
    for index, start in enumerate(starts):
        current_position += start
        mask[current_position:current_position+lengths[index]] = 255
        current_position += lengths[index]

    return mask.reshape(width, height)
In [15]:
file_paths=patients_train['path'].values
labels=patients_train['EncodedPixels'].values
In [16]:
# import mask_functions
from PIL import Image
mask=np.zeros((len(patients_train),256,256,1),dtype=np.bool)
for j,i in tqdm(enumerate(patients_train['EncodedPixels'].values)):
  a=(Image.fromarray(rle2mask(i,1024,1024).T).resize((256,256), resample=Image.BILINEAR)) #obtaining the masks and then resizing them
  a=np.array(a)
  a=np.expand_dims(a,axis=-1)
  mask[j]=a
2381it [00:54, 43.34it/s]
In [17]:
train_path=[]
train_mask=[]
test_path=[]
test_mask=[]
train_len=len(patients_train)-int(len(patients_train)*0.2) #We are dividing the data into train and test
test_len=int(len(patients_train)*0.2)
count=0
for i in tqdm(range(0,len(patients_train))):
  if count<=train_len:
    train_path.append(file_paths[i])
    train_mask.append(mask[i])
    count=count+1
  else:
    test_path.append(file_paths[i])
    test_mask.append(mask[i])
    
train_path=np.array(train_path)
test_path=np.array(test_path)
100%|██████████| 2381/2381 [00:00<00:00, 506987.40it/s]
In [18]:
train_ds = tf.data.Dataset.from_tensor_slices((train_path,train_mask))
train_ds = train_ds.shuffle(len(train_path),seed=42)
test_ds = tf.data.Dataset.from_tensor_slices((test_path,test_mask))
test_ds = test_ds.shuffle(len(test_path),seed=42)
In [19]:
def decode_img(img):
# convert the compressed string to a 3D uint8 tensor
#image_bytes = tf.io.read_file(img)
  image = tfio.image.decode_dicom_image(img, dtype=tf.uint8,color_dim=True,scale='preserve')
  image = tf.image.convert_image_dtype(image, tf.float32)#converting the image to tf.float32
  image=tf.squeeze(image,[0]) #squeezing the image because the file is of the shape(1,1024,1024,1) and we want (1024,1024,3)
  b = tf.constant([1,1,3], tf.int32)
  image=tf.tile(image,b)#the image is of the shape (1024,1024,1) to make it (1024,1024,3) I am using tf.tile
  image=tf.image.resize(image,size=[256,256]) # resize the image to the desired size
  return image
  
def process_path(file_path,label):
  img = tf.io.read_file(file_path) #reading the image from the file path
  img = decode_img(img) #passing the image to the function
  return img,label

def augment(image,label):
  a=tf.random.uniform((),minval=0,maxval=1)
  if a<0.2:
    image=tf.image.flip_left_right(image)
    label=tf.image.flip_left_right(label)
  if a<0.4 and a>0.2:
    image = tf.image.random_brightness(image, max_delta=0.15) # Random brightness
  if a<0.6 and a>0.4:
    image=tf.image.adjust_gamma(image, gamma=tf.random.uniform((),minval=0,maxval=1), gain=1)
  if a<0.8 and a>0.6:
    image=tf.image.random_contrast(image,lower=0.2,upper=0.3)
  if a<1.0 and a>0.8:
    image=tf.image.random_saturation(image, lower=2, upper=5)
  return image, label

def set_shapes(img, label, img_shape=(256,256,3)):
  img.set_shape(img_shape)
  label.set_shape((256,256,1))
  return img, label
In [20]:
import keras.backend as K
AUTOTUNE = tf.data.experimental.AUTOTUNE
train_ds = train_ds.map(process_path,num_parallel_calls=AUTOTUNE) #mapping the file paths to the above function
train_ds = train_ds.map(augment,num_parallel_calls=AUTOTUNE) #augmenting train data
train_ds = train_ds.map(set_shapes, num_parallel_calls=AUTOTUNE) #set the shape of train data
val_ds = test_ds.map(process_path,num_parallel_calls=AUTOTUNE)
val_ds = val_ds.map(set_shapes, num_parallel_calls=AUTOTUNE)
In [21]:
BATCH_SIZE = 32
BUFFER_SIZE = 1500
SEED = 42
AUTOTUNE = tf.data.experimental.AUTOTUNE

train_ds = train_ds.shuffle(buffer_size=BUFFER_SIZE, seed=SEED)
train_ds = train_ds.cache()
train_ds = train_ds.batch(BATCH_SIZE)
train_ds = train_ds.prefetch(buffer_size=AUTOTUNE)
print(train_ds)

# Preparing the Validation Dataset
val_ds = val_ds.cache()
val_ds = val_ds.batch(BATCH_SIZE)
val_ds = val_ds.prefetch(buffer_size=AUTOTUNE)
print(val_ds)
<PrefetchDataset shapes: ((None, 256, 256, 3), (None, 256, 256, 1)), types: (tf.float32, tf.bool)>
<PrefetchDataset shapes: ((None, 256, 256, 3), (None, 256, 256, 1)), types: (tf.float32, tf.bool)>
In [21]:
from tensorflow.keras import backend as K
from tensorflow.keras.losses import binary_crossentropy

def dice_coef(y_true, y_pred, smooth=1):
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
  
def bce_dice_loss(y_true, y_predict):
    return binary_crossentropy(y_true, y_predict) + (1-dice_coef(y_true, y_predict))

def  dice_loss(y_true, y_predict):
    return (1-dice_coef(y_true, y_predict))
In [22]:
import random, re, math
import tensorflow as tf, tensorflow.keras.backend as K
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Model
from tensorflow.keras import optimizers
from tensorflow.keras.models import Sequential
import tensorflow.keras.layers as L
from tensorflow.keras.applications import ResNet152V2, InceptionResNetV2, InceptionV3, Xception, VGG19
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import Dense, Input, Dropout, Lambda, Conv2D, Conv2DTranspose, MaxPooling2D, Concatenate, Activation, Add, multiply, add, concatenate, LeakyReLU, ZeroPadding2D, UpSampling2D, BatchNormalization, Flatten
from tensorflow.keras.regularizers import l2
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras import backend as K 

from sklearn.metrics import classification_report, confusion_matrix
# imports for building the network
from tensorflow import reduce_sum
from tensorflow.keras.backend import pow
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, MaxPool2D, UpSampling2D, Concatenate, Add, Flatten
from sklearn.model_selection import train_test_split
import cv2
import matplotlib.pyplot as plt
from tensorflow.keras.applications import DenseNet121, ResNet50V2, DenseNet169, InceptionResNetV2,MobileNetV2, NASNetMobile, DenseNet201, NASNetLarge, Xception
from tensorflow.keras.layers import Average
In [24]:
# https://yann-leguilly.gitlab.io/post/2019-12-14-tensorflow-tfdata-segmentation/

IMG_SIZE = 256
N_CHANNELS = 3
# Build U-Net model
inputs = Input((IMG_SIZE, IMG_SIZE, N_CHANNELS))
s = Lambda(lambda x: x / 255) (inputs)

c1 = Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (s)
c1 = Dropout(0.1) (c1)
c1 = Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c1)
p1 = MaxPooling2D((2, 2)) (c1)

c2 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (p1)
c2 = Dropout(0.1) (c2)
c2 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c2)
p2 = MaxPooling2D((2, 2)) (c2)

c3 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (p2)
c3 = Dropout(0.2) (c3)
c3 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c3)
p3 = MaxPooling2D((2, 2)) (c3)

c4 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (p3)
c4 = Dropout(0.2) (c4)
c4 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c4)
p4 = MaxPooling2D(pool_size=(2, 2)) (c4)

c5 = Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (p4)
c5 = Dropout(0.3) (c5)
c5 = Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c5)

u6 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same') (c5)
u6 = concatenate([u6, c4])
c6 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (u6)
c6 = Dropout(0.2) (c6)
c6 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c6)

u7 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same') (c6)
u7 = concatenate([u7, c3])
c7 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (u7)
c7 = Dropout(0.2) (c7)
c7 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c7)

u8 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same') (c7)
u8 = concatenate([u8, c2])
c8 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (u8)
c8 = Dropout(0.1) (c8)
c8 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c8)

u9 = Conv2DTranspose(16, (2, 2), strides=(2, 2), padding='same') (c8)
u9 = concatenate([u9, c1], axis=3)
c9 = Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (u9)
c9 = Dropout(0.1) (c9)
c9 = Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same') (c9)

outputs = Conv2D(1, (1, 1), activation='sigmoid') (c9)
In [25]:
model = tf.keras.Model(inputs=[inputs], outputs=[outputs])
In [26]:
model.compile(optimizer=Adam(learning_rate=0.001), loss = 'binary_crossentropy', metrics=['accuracy', dice_coef])
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 256, 256, 3) 0                                            
__________________________________________________________________________________________________
lambda (Lambda)                 (None, 256, 256, 3)  0           input_1[0][0]                    
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 256, 256, 16) 448         lambda[0][0]                     
__________________________________________________________________________________________________
dropout (Dropout)               (None, 256, 256, 16) 0           conv2d[0][0]                     
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 256, 256, 16) 2320        dropout[0][0]                    
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D)    (None, 128, 128, 16) 0           conv2d_1[0][0]                   
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 128, 128, 32) 4640        max_pooling2d[0][0]              
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 128, 128, 32) 0           conv2d_2[0][0]                   
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 128, 128, 32) 9248        dropout_1[0][0]                  
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D)  (None, 64, 64, 32)   0           conv2d_3[0][0]                   
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 64, 64, 64)   18496       max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 64, 64, 64)   0           conv2d_4[0][0]                   
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 64, 64, 64)   36928       dropout_2[0][0]                  
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D)  (None, 32, 32, 64)   0           conv2d_5[0][0]                   
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 32, 32, 128)  73856       max_pooling2d_2[0][0]            
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 32, 32, 128)  0           conv2d_6[0][0]                   
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 32, 32, 128)  147584      dropout_3[0][0]                  
__________________________________________________________________________________________________
max_pooling2d_3 (MaxPooling2D)  (None, 16, 16, 128)  0           conv2d_7[0][0]                   
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 16, 16, 256)  295168      max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
dropout_4 (Dropout)             (None, 16, 16, 256)  0           conv2d_8[0][0]                   
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 16, 16, 256)  590080      dropout_4[0][0]                  
__________________________________________________________________________________________________
conv2d_transpose (Conv2DTranspo (None, 32, 32, 128)  131200      conv2d_9[0][0]                   
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 32, 32, 256)  0           conv2d_transpose[0][0]           
                                                                 conv2d_7[0][0]                   
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 32, 32, 128)  295040      concatenate[0][0]                
__________________________________________________________________________________________________
dropout_5 (Dropout)             (None, 32, 32, 128)  0           conv2d_10[0][0]                  
__________________________________________________________________________________________________
conv2d_11 (Conv2D)              (None, 32, 32, 128)  147584      dropout_5[0][0]                  
__________________________________________________________________________________________________
conv2d_transpose_1 (Conv2DTrans (None, 64, 64, 64)   32832       conv2d_11[0][0]                  
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 64, 64, 128)  0           conv2d_transpose_1[0][0]         
                                                                 conv2d_5[0][0]                   
__________________________________________________________________________________________________
conv2d_12 (Conv2D)              (None, 64, 64, 64)   73792       concatenate_1[0][0]              
__________________________________________________________________________________________________
dropout_6 (Dropout)             (None, 64, 64, 64)   0           conv2d_12[0][0]                  
__________________________________________________________________________________________________
conv2d_13 (Conv2D)              (None, 64, 64, 64)   36928       dropout_6[0][0]                  
__________________________________________________________________________________________________
conv2d_transpose_2 (Conv2DTrans (None, 128, 128, 32) 8224        conv2d_13[0][0]                  
__________________________________________________________________________________________________
concatenate_2 (Concatenate)     (None, 128, 128, 64) 0           conv2d_transpose_2[0][0]         
                                                                 conv2d_3[0][0]                   
__________________________________________________________________________________________________
conv2d_14 (Conv2D)              (None, 128, 128, 32) 18464       concatenate_2[0][0]              
__________________________________________________________________________________________________
dropout_7 (Dropout)             (None, 128, 128, 32) 0           conv2d_14[0][0]                  
__________________________________________________________________________________________________
conv2d_15 (Conv2D)              (None, 128, 128, 32) 9248        dropout_7[0][0]                  
__________________________________________________________________________________________________
conv2d_transpose_3 (Conv2DTrans (None, 256, 256, 16) 2064        conv2d_15[0][0]                  
__________________________________________________________________________________________________
concatenate_3 (Concatenate)     (None, 256, 256, 32) 0           conv2d_transpose_3[0][0]         
                                                                 conv2d_1[0][0]                   
__________________________________________________________________________________________________
conv2d_16 (Conv2D)              (None, 256, 256, 16) 4624        concatenate_3[0][0]              
__________________________________________________________________________________________________
dropout_8 (Dropout)             (None, 256, 256, 16) 0           conv2d_16[0][0]                  
__________________________________________________________________________________________________
conv2d_17 (Conv2D)              (None, 256, 256, 16) 2320        dropout_8[0][0]                  
__________________________________________________________________________________________________
conv2d_18 (Conv2D)              (None, 256, 256, 1)  17          conv2d_17[0][0]                  
==================================================================================================
Total params: 1,941,105
Trainable params: 1,941,105
Non-trainable params: 0
__________________________________________________________________________________________________
In [27]:
from tensorflow.keras.utils import plot_model
plot_model(model, 'model.png', show_shapes=True)
Out[27]:
In [28]:
# tensor-board in colab
# Refer: https://www.tensorflow.org/tensorboard/get_started
import os
import datetime

! rm -rf ./logs/ 
logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print(logdir)
logs/20210114-124932
In [29]:
import os
os.mkdir("model_save")
In [30]:
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import CSVLogger

filepath="model_save/"
checkpoints = ModelCheckpoint(filepath+'simple_unet_weights-{epoch:02d}-{val_dice_coef:.4f}.hdf5', monitor='val_dice_coef', save_weights_only=True, verbose=1, save_best_only=True, mode='max')

train_log = CSVLogger(filepath+'simple_unet.log') #storing the training results in a pandas dataframe

tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)

callbacks_list = [checkpoints, train_log, tensorboard_callback]
In [31]:
%load_ext tensorboard
%tensorboard --logdir $logdir
In [32]:
history1 = model.fit(train_ds,epochs=75,batch_size=64,validation_data=val_ds,callbacks=callbacks_list)
Epoch 1/75
60/60 [==============================] - 112s 714ms/step - loss: 0.3231 - accuracy: 0.9254 - dice_coef: 0.0165 - val_loss: 0.0799 - val_accuracy: 0.9862 - val_dice_coef: 0.0275

Epoch 00001: val_dice_coef improved from -inf to 0.02753, saving model to model_save/simple_unet_weights-01-0.0275.hdf5
Epoch 2/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0689 - accuracy: 0.9864 - dice_coef: 0.0233 - val_loss: 0.0728 - val_accuracy: 0.9862 - val_dice_coef: 0.0392

Epoch 00002: val_dice_coef improved from 0.02753 to 0.03917, saving model to model_save/simple_unet_weights-02-0.0392.hdf5
Epoch 3/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0647 - accuracy: 0.9864 - dice_coef: 0.0359 - val_loss: 0.0717 - val_accuracy: 0.9862 - val_dice_coef: 0.0423

Epoch 00003: val_dice_coef improved from 0.03917 to 0.04233, saving model to model_save/simple_unet_weights-03-0.0423.hdf5
Epoch 4/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0633 - accuracy: 0.9864 - dice_coef: 0.0406 - val_loss: 0.0702 - val_accuracy: 0.9862 - val_dice_coef: 0.0424

Epoch 00004: val_dice_coef improved from 0.04233 to 0.04243, saving model to model_save/simple_unet_weights-04-0.0424.hdf5
Epoch 5/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0633 - accuracy: 0.9864 - dice_coef: 0.0423 - val_loss: 0.0688 - val_accuracy: 0.9862 - val_dice_coef: 0.0435

Epoch 00005: val_dice_coef improved from 0.04243 to 0.04345, saving model to model_save/simple_unet_weights-05-0.0435.hdf5
Epoch 6/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0633 - accuracy: 0.9864 - dice_coef: 0.0427 - val_loss: 0.0657 - val_accuracy: 0.9862 - val_dice_coef: 0.0433

Epoch 00006: val_dice_coef did not improve from 0.04345
Epoch 7/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0631 - accuracy: 0.9864 - dice_coef: 0.0428 - val_loss: 0.0650 - val_accuracy: 0.9862 - val_dice_coef: 0.0433

Epoch 00007: val_dice_coef did not improve from 0.04345
Epoch 8/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0634 - accuracy: 0.9864 - dice_coef: 0.0427 - val_loss: 0.0671 - val_accuracy: 0.9862 - val_dice_coef: 0.0414

Epoch 00008: val_dice_coef did not improve from 0.04345
Epoch 9/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0623 - accuracy: 0.9864 - dice_coef: 0.0433 - val_loss: 0.0659 - val_accuracy: 0.9862 - val_dice_coef: 0.0388

Epoch 00009: val_dice_coef did not improve from 0.04345
Epoch 10/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0621 - accuracy: 0.9864 - dice_coef: 0.0431 - val_loss: 0.0659 - val_accuracy: 0.9862 - val_dice_coef: 0.0445

Epoch 00010: val_dice_coef improved from 0.04345 to 0.04448, saving model to model_save/simple_unet_weights-10-0.0445.hdf5
Epoch 11/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0619 - accuracy: 0.9864 - dice_coef: 0.0446 - val_loss: 0.0642 - val_accuracy: 0.9862 - val_dice_coef: 0.0454

Epoch 00011: val_dice_coef improved from 0.04448 to 0.04543, saving model to model_save/simple_unet_weights-11-0.0454.hdf5
Epoch 12/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0615 - accuracy: 0.9864 - dice_coef: 0.0457 - val_loss: 0.0641 - val_accuracy: 0.9862 - val_dice_coef: 0.0444

Epoch 00012: val_dice_coef did not improve from 0.04543
Epoch 13/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0613 - accuracy: 0.9864 - dice_coef: 0.0477 - val_loss: 0.0624 - val_accuracy: 0.9862 - val_dice_coef: 0.0489

Epoch 00013: val_dice_coef improved from 0.04543 to 0.04893, saving model to model_save/simple_unet_weights-13-0.0489.hdf5
Epoch 14/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0605 - accuracy: 0.9864 - dice_coef: 0.0485 - val_loss: 0.0615 - val_accuracy: 0.9862 - val_dice_coef: 0.0528

Epoch 00014: val_dice_coef improved from 0.04893 to 0.05281, saving model to model_save/simple_unet_weights-14-0.0528.hdf5
Epoch 15/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0595 - accuracy: 0.9864 - dice_coef: 0.0570 - val_loss: 0.0605 - val_accuracy: 0.9862 - val_dice_coef: 0.0609

Epoch 00015: val_dice_coef improved from 0.05281 to 0.06085, saving model to model_save/simple_unet_weights-15-0.0609.hdf5
Epoch 16/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0585 - accuracy: 0.9864 - dice_coef: 0.0640 - val_loss: 0.0593 - val_accuracy: 0.9862 - val_dice_coef: 0.0724

Epoch 00016: val_dice_coef improved from 0.06085 to 0.07238, saving model to model_save/simple_unet_weights-16-0.0724.hdf5
Epoch 17/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0571 - accuracy: 0.9864 - dice_coef: 0.0735 - val_loss: 0.0563 - val_accuracy: 0.9862 - val_dice_coef: 0.0810

Epoch 00017: val_dice_coef improved from 0.07238 to 0.08103, saving model to model_save/simple_unet_weights-17-0.0810.hdf5
Epoch 18/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0552 - accuracy: 0.9864 - dice_coef: 0.0842 - val_loss: 0.0524 - val_accuracy: 0.9862 - val_dice_coef: 0.0932

Epoch 00018: val_dice_coef improved from 0.08103 to 0.09321, saving model to model_save/simple_unet_weights-18-0.0932.hdf5
Epoch 19/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0540 - accuracy: 0.9864 - dice_coef: 0.0945 - val_loss: 0.0527 - val_accuracy: 0.9862 - val_dice_coef: 0.0903

Epoch 00019: val_dice_coef did not improve from 0.09321
Epoch 20/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0526 - accuracy: 0.9864 - dice_coef: 0.0995 - val_loss: 0.0513 - val_accuracy: 0.9862 - val_dice_coef: 0.1008

Epoch 00020: val_dice_coef improved from 0.09321 to 0.10084, saving model to model_save/simple_unet_weights-20-0.1008.hdf5
Epoch 21/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0521 - accuracy: 0.9864 - dice_coef: 0.1049 - val_loss: 0.0507 - val_accuracy: 0.9862 - val_dice_coef: 0.1162

Epoch 00021: val_dice_coef improved from 0.10084 to 0.11617, saving model to model_save/simple_unet_weights-21-0.1162.hdf5
Epoch 22/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0517 - accuracy: 0.9864 - dice_coef: 0.1080 - val_loss: 0.0508 - val_accuracy: 0.9862 - val_dice_coef: 0.1224

Epoch 00022: val_dice_coef improved from 0.11617 to 0.12241, saving model to model_save/simple_unet_weights-22-0.1224.hdf5
Epoch 23/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0504 - accuracy: 0.9864 - dice_coef: 0.1153 - val_loss: 0.0506 - val_accuracy: 0.9862 - val_dice_coef: 0.1153

Epoch 00023: val_dice_coef did not improve from 0.12241
Epoch 24/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0498 - accuracy: 0.9864 - dice_coef: 0.1192 - val_loss: 0.0501 - val_accuracy: 0.9862 - val_dice_coef: 0.1216

Epoch 00024: val_dice_coef did not improve from 0.12241
Epoch 25/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0492 - accuracy: 0.9864 - dice_coef: 0.1223 - val_loss: 0.0497 - val_accuracy: 0.9862 - val_dice_coef: 0.1191

Epoch 00025: val_dice_coef did not improve from 0.12241
Epoch 26/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0483 - accuracy: 0.9864 - dice_coef: 0.1269 - val_loss: 0.0493 - val_accuracy: 0.9862 - val_dice_coef: 0.1288

Epoch 00026: val_dice_coef improved from 0.12241 to 0.12884, saving model to model_save/simple_unet_weights-26-0.1288.hdf5
Epoch 27/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0483 - accuracy: 0.9864 - dice_coef: 0.1298 - val_loss: 0.0503 - val_accuracy: 0.9862 - val_dice_coef: 0.1246

Epoch 00027: val_dice_coef did not improve from 0.12884
Epoch 28/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0478 - accuracy: 0.9864 - dice_coef: 0.1322 - val_loss: 0.0517 - val_accuracy: 0.9862 - val_dice_coef: 0.1541

Epoch 00028: val_dice_coef improved from 0.12884 to 0.15414, saving model to model_save/simple_unet_weights-28-0.1541.hdf5
Epoch 29/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0474 - accuracy: 0.9864 - dice_coef: 0.1443 - val_loss: 0.0511 - val_accuracy: 0.9863 - val_dice_coef: 0.1544

Epoch 00029: val_dice_coef improved from 0.15414 to 0.15445, saving model to model_save/simple_unet_weights-29-0.1544.hdf5
Epoch 30/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0464 - accuracy: 0.9864 - dice_coef: 0.1490 - val_loss: 0.0502 - val_accuracy: 0.9862 - val_dice_coef: 0.1381

Epoch 00030: val_dice_coef did not improve from 0.15445
Epoch 31/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0457 - accuracy: 0.9863 - dice_coef: 0.1553 - val_loss: 0.0529 - val_accuracy: 0.9862 - val_dice_coef: 0.1666

Epoch 00031: val_dice_coef improved from 0.15445 to 0.16660, saving model to model_save/simple_unet_weights-31-0.1666.hdf5
Epoch 32/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0450 - accuracy: 0.9865 - dice_coef: 0.1661 - val_loss: 0.0515 - val_accuracy: 0.9862 - val_dice_coef: 0.1651

Epoch 00032: val_dice_coef did not improve from 0.16660
Epoch 33/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0455 - accuracy: 0.9863 - dice_coef: 0.1610 - val_loss: 0.0491 - val_accuracy: 0.9862 - val_dice_coef: 0.1658

Epoch 00033: val_dice_coef did not improve from 0.16660
Epoch 34/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0445 - accuracy: 0.9864 - dice_coef: 0.1693 - val_loss: 0.0513 - val_accuracy: 0.9860 - val_dice_coef: 0.1760

Epoch 00034: val_dice_coef improved from 0.16660 to 0.17597, saving model to model_save/simple_unet_weights-34-0.1760.hdf5
Epoch 35/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0436 - accuracy: 0.9865 - dice_coef: 0.1855 - val_loss: 0.0507 - val_accuracy: 0.9854 - val_dice_coef: 0.1801

Epoch 00035: val_dice_coef improved from 0.17597 to 0.18009, saving model to model_save/simple_unet_weights-35-0.1801.hdf5
Epoch 36/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0427 - accuracy: 0.9865 - dice_coef: 0.1981 - val_loss: 0.0522 - val_accuracy: 0.9840 - val_dice_coef: 0.2059

Epoch 00036: val_dice_coef improved from 0.18009 to 0.20586, saving model to model_save/simple_unet_weights-36-0.2059.hdf5
Epoch 37/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0407 - accuracy: 0.9868 - dice_coef: 0.2221 - val_loss: 0.0573 - val_accuracy: 0.9813 - val_dice_coef: 0.1968

Epoch 00037: val_dice_coef did not improve from 0.20586
Epoch 38/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0426 - accuracy: 0.9867 - dice_coef: 0.2064 - val_loss: 0.0529 - val_accuracy: 0.9834 - val_dice_coef: 0.2187

Epoch 00038: val_dice_coef improved from 0.20586 to 0.21869, saving model to model_save/simple_unet_weights-38-0.2187.hdf5
Epoch 39/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0397 - accuracy: 0.9869 - dice_coef: 0.2408 - val_loss: 0.0529 - val_accuracy: 0.9831 - val_dice_coef: 0.2028

Epoch 00039: val_dice_coef did not improve from 0.21869
Epoch 40/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0383 - accuracy: 0.9873 - dice_coef: 0.2622 - val_loss: 0.0550 - val_accuracy: 0.9825 - val_dice_coef: 0.2167

Epoch 00040: val_dice_coef did not improve from 0.21869
Epoch 41/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0373 - accuracy: 0.9878 - dice_coef: 0.2836 - val_loss: 0.0568 - val_accuracy: 0.9815 - val_dice_coef: 0.2200

Epoch 00041: val_dice_coef improved from 0.21869 to 0.21997, saving model to model_save/simple_unet_weights-41-0.2200.hdf5
Epoch 42/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0366 - accuracy: 0.9878 - dice_coef: 0.2907 - val_loss: 0.0565 - val_accuracy: 0.9817 - val_dice_coef: 0.2409

Epoch 00042: val_dice_coef improved from 0.21997 to 0.24095, saving model to model_save/simple_unet_weights-42-0.2409.hdf5
Epoch 43/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0368 - accuracy: 0.9876 - dice_coef: 0.2896 - val_loss: 0.0556 - val_accuracy: 0.9821 - val_dice_coef: 0.2189

Epoch 00043: val_dice_coef did not improve from 0.24095
Epoch 44/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0348 - accuracy: 0.9883 - dice_coef: 0.3196 - val_loss: 0.0569 - val_accuracy: 0.9824 - val_dice_coef: 0.2396

Epoch 00044: val_dice_coef did not improve from 0.24095
Epoch 45/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0335 - accuracy: 0.9887 - dice_coef: 0.3438 - val_loss: 0.0604 - val_accuracy: 0.9818 - val_dice_coef: 0.2437

Epoch 00045: val_dice_coef improved from 0.24095 to 0.24372, saving model to model_save/simple_unet_weights-45-0.2437.hdf5
Epoch 46/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0366 - accuracy: 0.9880 - dice_coef: 0.3022 - val_loss: 0.0578 - val_accuracy: 0.9818 - val_dice_coef: 0.2259

Epoch 00046: val_dice_coef did not improve from 0.24372
Epoch 47/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0354 - accuracy: 0.9883 - dice_coef: 0.3237 - val_loss: 0.0574 - val_accuracy: 0.9829 - val_dice_coef: 0.2281

Epoch 00047: val_dice_coef did not improve from 0.24372
Epoch 48/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0317 - accuracy: 0.9891 - dice_coef: 0.3754 - val_loss: 0.0580 - val_accuracy: 0.9836 - val_dice_coef: 0.2297

Epoch 00048: val_dice_coef did not improve from 0.24372
Epoch 49/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0302 - accuracy: 0.9896 - dice_coef: 0.3973 - val_loss: 0.0563 - val_accuracy: 0.9833 - val_dice_coef: 0.2347

Epoch 00049: val_dice_coef did not improve from 0.24372
Epoch 50/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0288 - accuracy: 0.9901 - dice_coef: 0.4216 - val_loss: 0.0564 - val_accuracy: 0.9844 - val_dice_coef: 0.2171

Epoch 00050: val_dice_coef did not improve from 0.24372
Epoch 51/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0285 - accuracy: 0.9900 - dice_coef: 0.4203 - val_loss: 0.0560 - val_accuracy: 0.9849 - val_dice_coef: 0.2209

Epoch 00051: val_dice_coef did not improve from 0.24372
Epoch 52/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0294 - accuracy: 0.9896 - dice_coef: 0.4062 - val_loss: 0.0568 - val_accuracy: 0.9850 - val_dice_coef: 0.2271

Epoch 00052: val_dice_coef did not improve from 0.24372
Epoch 53/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0277 - accuracy: 0.9903 - dice_coef: 0.4343 - val_loss: 0.0560 - val_accuracy: 0.9842 - val_dice_coef: 0.2182

Epoch 00053: val_dice_coef did not improve from 0.24372
Epoch 54/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0282 - accuracy: 0.9901 - dice_coef: 0.4309 - val_loss: 0.0551 - val_accuracy: 0.9847 - val_dice_coef: 0.2282

Epoch 00054: val_dice_coef did not improve from 0.24372
Epoch 55/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0252 - accuracy: 0.9910 - dice_coef: 0.4769 - val_loss: 0.0590 - val_accuracy: 0.9848 - val_dice_coef: 0.2224

Epoch 00055: val_dice_coef did not improve from 0.24372
Epoch 56/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0263 - accuracy: 0.9908 - dice_coef: 0.4647 - val_loss: 0.0572 - val_accuracy: 0.9853 - val_dice_coef: 0.2183

Epoch 00056: val_dice_coef did not improve from 0.24372
Epoch 57/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0251 - accuracy: 0.9910 - dice_coef: 0.4807 - val_loss: 0.0604 - val_accuracy: 0.9838 - val_dice_coef: 0.2428

Epoch 00057: val_dice_coef did not improve from 0.24372
Epoch 58/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0243 - accuracy: 0.9913 - dice_coef: 0.4978 - val_loss: 0.0614 - val_accuracy: 0.9845 - val_dice_coef: 0.2441

Epoch 00058: val_dice_coef improved from 0.24372 to 0.24406, saving model to model_save/simple_unet_weights-58-0.2441.hdf5
Epoch 59/75
60/60 [==============================] - 10s 166ms/step - loss: 0.0234 - accuracy: 0.9915 - dice_coef: 0.5194 - val_loss: 0.0599 - val_accuracy: 0.9841 - val_dice_coef: 0.2480

Epoch 00059: val_dice_coef improved from 0.24406 to 0.24804, saving model to model_save/simple_unet_weights-59-0.2480.hdf5
Epoch 60/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0227 - accuracy: 0.9917 - dice_coef: 0.5287 - val_loss: 0.0638 - val_accuracy: 0.9852 - val_dice_coef: 0.2238

Epoch 00060: val_dice_coef did not improve from 0.24804
Epoch 61/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0228 - accuracy: 0.9916 - dice_coef: 0.5282 - val_loss: 0.0611 - val_accuracy: 0.9852 - val_dice_coef: 0.2326

Epoch 00061: val_dice_coef did not improve from 0.24804
Epoch 62/75
60/60 [==============================] - 10s 165ms/step - loss: 0.0226 - accuracy: 0.9918 - dice_coef: 0.5301 - val_loss: 0.0630 - val_accuracy: 0.9844 - val_dice_coef: 0.2397

Epoch 00062: val_dice_coef did not improve from 0.24804
Epoch 63/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0227 - accuracy: 0.9918 - dice_coef: 0.5277 - val_loss: 0.0645 - val_accuracy: 0.9845 - val_dice_coef: 0.2393

Epoch 00063: val_dice_coef did not improve from 0.24804
Epoch 64/75
60/60 [==============================] - 10s 167ms/step - loss: 0.0222 - accuracy: 0.9919 - dice_coef: 0.5387 - val_loss: 0.0570 - val_accuracy: 0.9853 - val_dice_coef: 0.2230

Epoch 00064: val_dice_coef did not improve from 0.24804
Epoch 65/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0223 - accuracy: 0.9919 - dice_coef: 0.5319 - val_loss: 0.0582 - val_accuracy: 0.9849 - val_dice_coef: 0.2380

Epoch 00065: val_dice_coef did not improve from 0.24804
Epoch 66/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0215 - accuracy: 0.9921 - dice_coef: 0.5473 - val_loss: 0.0645 - val_accuracy: 0.9846 - val_dice_coef: 0.2384

Epoch 00066: val_dice_coef did not improve from 0.24804
Epoch 67/75
60/60 [==============================] - 10s 170ms/step - loss: 0.0217 - accuracy: 0.9919 - dice_coef: 0.5477 - val_loss: 0.0662 - val_accuracy: 0.9852 - val_dice_coef: 0.2289

Epoch 00067: val_dice_coef did not improve from 0.24804
Epoch 68/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0221 - accuracy: 0.9917 - dice_coef: 0.5315 - val_loss: 0.0656 - val_accuracy: 0.9847 - val_dice_coef: 0.2506

Epoch 00068: val_dice_coef improved from 0.24804 to 0.25063, saving model to model_save/simple_unet_weights-68-0.2506.hdf5
Epoch 69/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0213 - accuracy: 0.9921 - dice_coef: 0.5577 - val_loss: 0.0598 - val_accuracy: 0.9844 - val_dice_coef: 0.2460

Epoch 00069: val_dice_coef did not improve from 0.25063
Epoch 70/75
60/60 [==============================] - 10s 168ms/step - loss: 0.0210 - accuracy: 0.9922 - dice_coef: 0.5605 - val_loss: 0.0603 - val_accuracy: 0.9848 - val_dice_coef: 0.2509

Epoch 00070: val_dice_coef improved from 0.25063 to 0.25089, saving model to model_save/simple_unet_weights-70-0.2509.hdf5
Epoch 71/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0203 - accuracy: 0.9924 - dice_coef: 0.5683 - val_loss: 0.0669 - val_accuracy: 0.9854 - val_dice_coef: 0.2472

Epoch 00071: val_dice_coef did not improve from 0.25089
Epoch 72/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0195 - accuracy: 0.9927 - dice_coef: 0.5850 - val_loss: 0.0792 - val_accuracy: 0.9854 - val_dice_coef: 0.2288

Epoch 00072: val_dice_coef did not improve from 0.25089
Epoch 73/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0188 - accuracy: 0.9929 - dice_coef: 0.6023 - val_loss: 0.0784 - val_accuracy: 0.9860 - val_dice_coef: 0.1897

Epoch 00073: val_dice_coef did not improve from 0.25089
Epoch 74/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0196 - accuracy: 0.9927 - dice_coef: 0.5885 - val_loss: 0.0717 - val_accuracy: 0.9853 - val_dice_coef: 0.2268

Epoch 00074: val_dice_coef did not improve from 0.25089
Epoch 75/75
60/60 [==============================] - 10s 169ms/step - loss: 0.0190 - accuracy: 0.9928 - dice_coef: 0.6019 - val_loss: 0.0777 - val_accuracy: 0.9853 - val_dice_coef: 0.2193

Epoch 00075: val_dice_coef did not improve from 0.25089
In [33]:
from IPython.display import Image
Image(url='https://imgur.com/akazboj.png')                                      
Out[33]:
In [34]:
_ = model.evaluate(train_ds)
_ = model.evaluate(val_ds)
60/60 [==============================] - 3s 49ms/step - loss: 0.0165 - accuracy: 0.9936 - dice_coef: 0.6358
15/15 [==============================] - 1s 48ms/step - loss: 0.0777 - accuracy: 0.9853 - dice_coef: 0.2193
In [35]:
model.save('/content/model_save/simple_unet_weights-70-0.2509.hdf5')
In [36]:
model.load_weights('/content/model_save/simple_unet_weights-70-0.2509.hdf5')
for i,j in val_ds.take(20):
  a=model.predict(i)
  preds_val_t = (a[0] > 0.5).astype(np.uint8)
  plt.figure(figsize=(20,6))
  plt.subplot(131)
  plt.title("original image")
  plt.imshow(np.squeeze(i[0]),cmap='gray')
  plt.subplot(132)
  plt.title("ground truth")
  plt.imshow(np.squeeze(j[0]),cmap='gray')
  plt.subplot(133)
  plt.title("predicted mask")
  plt.imshow(np.squeeze(preds_val_t).astype(np.uint8),cmap='gray')
  plt.show()
In [38]:
model.save('/content/drive/My Drive/siim-acr-pneumothorax/pneumothorax/simple_unet_weights-70-0.2509.hdf5')

B) Unet model with Densenet121 as backbone

In [23]:
dense_net_121 = tf.keras.applications.DenseNet121(input_shape=[256,256,3],include_top=False,pooling ='avg')
base_model_output = tf.keras.layers.Dense(units=14,activation='relu')(dense_net_121.output)
base_model = Model(inputs = dense_net_121.input,outputs=base_model_output)
base_model.load_weights('/content/drive/MyDrive/JC_densenet121_Keras_0.3.0_weights.h5')
output_layer = tf.keras.layers.Dense(1,activation='sigmoid')(base_model.layers[-2].output)
model = Model(inputs=base_model.inputs, outputs=output_layer)
model1=tf.keras.layers.UpSampling2D((2,2))(model.layers[-3].output)
model1=tf.keras.layers.concatenate([model1,model.get_layer('pool4_conv').output])
model1=tf.keras.layers.Conv2D(256,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(256,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.UpSampling2D((2,2))(model1)
model1=tf.keras.layers.concatenate([model1,model.get_layer('pool3_conv').output])
model1=tf.keras.layers.Conv2D(128,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(128,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.UpSampling2D((2,2))(model1)
model1=tf.keras.layers.concatenate([model1,model.get_layer('pool2_conv').output])
model1=tf.keras.layers.Conv2D(64,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(64,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.UpSampling2D((2,2))(model1)
model1=tf.keras.layers.concatenate([model1,model.get_layer('conv1/relu').output])
model1=tf.keras.layers.Conv2D(32,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(32,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.UpSampling2D((2,2))(model1)
model1=tf.keras.layers.Conv2D(16,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(16,(3,3),padding='same',use_bias=False,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.BatchNormalization()(model1)
model1=tf.keras.layers.Activation('relu')(model1)
model1=tf.keras.layers.Conv2D(1,(3,3),padding='same',use_bias=True,kernel_initializer='glorot_uniform')(model1)
model1=tf.keras.layers.Activation('sigmoid')(model1)
unet_densenet_model=Model(inputs=model.inputs, outputs=model1)
unet_densenet_model.compile(optimizer='adam', loss='binary_crossentropy',metrics=['accuracy',dice_coef])
unet_densenet_model.summary()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/densenet/densenet121_weights_tf_dim_ordering_tf_kernels_notop.h5
29089792/29084464 [==============================] - 0s 0us/step
Model: "model_2"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 256, 256, 3) 0                                            
__________________________________________________________________________________________________
zero_padding2d (ZeroPadding2D)  (None, 262, 262, 3)  0           input_1[0][0]                    
__________________________________________________________________________________________________
conv1/conv (Conv2D)             (None, 128, 128, 64) 9408        zero_padding2d[0][0]             
__________________________________________________________________________________________________
conv1/bn (BatchNormalization)   (None, 128, 128, 64) 256         conv1/conv[0][0]                 
__________________________________________________________________________________________________
conv1/relu (Activation)         (None, 128, 128, 64) 0           conv1/bn[0][0]                   
__________________________________________________________________________________________________
zero_padding2d_1 (ZeroPadding2D (None, 130, 130, 64) 0           conv1/relu[0][0]                 
__________________________________________________________________________________________________
pool1 (MaxPooling2D)            (None, 64, 64, 64)   0           zero_padding2d_1[0][0]           
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 64, 64, 64)   256         pool1[0][0]                      
__________________________________________________________________________________________________
conv2_block1_0_relu (Activation (None, 64, 64, 64)   0           conv2_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D)    (None, 64, 64, 128)  8192        conv2_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 64, 64, 128)  0           conv2_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_concat (Concatenat (None, 64, 64, 96)   0           pool1[0][0]                      
                                                                 conv2_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_0_bn (BatchNormali (None, 64, 64, 96)   384         conv2_block1_concat[0][0]        
__________________________________________________________________________________________________
conv2_block2_0_relu (Activation (None, 64, 64, 96)   0           conv2_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D)    (None, 64, 64, 128)  12288       conv2_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 64, 64, 128)  0           conv2_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_concat (Concatenat (None, 64, 64, 128)  0           conv2_block1_concat[0][0]        
                                                                 conv2_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_0_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block2_concat[0][0]        
__________________________________________________________________________________________________
conv2_block3_0_relu (Activation (None, 64, 64, 128)  0           conv2_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D)    (None, 64, 64, 128)  16384       conv2_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 64, 64, 128)  0           conv2_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_concat (Concatenat (None, 64, 64, 160)  0           conv2_block2_concat[0][0]        
                                                                 conv2_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block4_0_bn (BatchNormali (None, 64, 64, 160)  640         conv2_block3_concat[0][0]        
__________________________________________________________________________________________________
conv2_block4_0_relu (Activation (None, 64, 64, 160)  0           conv2_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block4_1_conv (Conv2D)    (None, 64, 64, 128)  20480       conv2_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block4_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block4_1_relu (Activation (None, 64, 64, 128)  0           conv2_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block4_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block4_concat (Concatenat (None, 64, 64, 192)  0           conv2_block3_concat[0][0]        
                                                                 conv2_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block5_0_bn (BatchNormali (None, 64, 64, 192)  768         conv2_block4_concat[0][0]        
__________________________________________________________________________________________________
conv2_block5_0_relu (Activation (None, 64, 64, 192)  0           conv2_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block5_1_conv (Conv2D)    (None, 64, 64, 128)  24576       conv2_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block5_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block5_1_relu (Activation (None, 64, 64, 128)  0           conv2_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block5_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block5_concat (Concatenat (None, 64, 64, 224)  0           conv2_block4_concat[0][0]        
                                                                 conv2_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block6_0_bn (BatchNormali (None, 64, 64, 224)  896         conv2_block5_concat[0][0]        
__________________________________________________________________________________________________
conv2_block6_0_relu (Activation (None, 64, 64, 224)  0           conv2_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block6_1_conv (Conv2D)    (None, 64, 64, 128)  28672       conv2_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block6_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block6_1_relu (Activation (None, 64, 64, 128)  0           conv2_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block6_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block6_concat (Concatenat (None, 64, 64, 256)  0           conv2_block5_concat[0][0]        
                                                                 conv2_block6_2_conv[0][0]        
__________________________________________________________________________________________________
pool2_bn (BatchNormalization)   (None, 64, 64, 256)  1024        conv2_block6_concat[0][0]        
__________________________________________________________________________________________________
pool2_relu (Activation)         (None, 64, 64, 256)  0           pool2_bn[0][0]                   
__________________________________________________________________________________________________
pool2_conv (Conv2D)             (None, 64, 64, 128)  32768       pool2_relu[0][0]                 
__________________________________________________________________________________________________
pool2_pool (AveragePooling2D)   (None, 32, 32, 128)  0           pool2_conv[0][0]                 
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 32, 32, 128)  512         pool2_pool[0][0]                 
__________________________________________________________________________________________________
conv3_block1_0_relu (Activation (None, 32, 32, 128)  0           conv3_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D)    (None, 32, 32, 128)  16384       conv3_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 32, 32, 128)  0           conv3_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_concat (Concatenat (None, 32, 32, 160)  0           pool2_pool[0][0]                 
                                                                 conv3_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_0_bn (BatchNormali (None, 32, 32, 160)  640         conv3_block1_concat[0][0]        
__________________________________________________________________________________________________
conv3_block2_0_relu (Activation (None, 32, 32, 160)  0           conv3_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D)    (None, 32, 32, 128)  20480       conv3_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 32, 32, 128)  0           conv3_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_concat (Concatenat (None, 32, 32, 192)  0           conv3_block1_concat[0][0]        
                                                                 conv3_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_0_bn (BatchNormali (None, 32, 32, 192)  768         conv3_block2_concat[0][0]        
__________________________________________________________________________________________________
conv3_block3_0_relu (Activation (None, 32, 32, 192)  0           conv3_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D)    (None, 32, 32, 128)  24576       conv3_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 32, 32, 128)  0           conv3_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_concat (Concatenat (None, 32, 32, 224)  0           conv3_block2_concat[0][0]        
                                                                 conv3_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_0_bn (BatchNormali (None, 32, 32, 224)  896         conv3_block3_concat[0][0]        
__________________________________________________________________________________________________
conv3_block4_0_relu (Activation (None, 32, 32, 224)  0           conv3_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D)    (None, 32, 32, 128)  28672       conv3_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 32, 32, 128)  0           conv3_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_concat (Concatenat (None, 32, 32, 256)  0           conv3_block3_concat[0][0]        
                                                                 conv3_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_0_bn (BatchNormali (None, 32, 32, 256)  1024        conv3_block4_concat[0][0]        
__________________________________________________________________________________________________
conv3_block5_0_relu (Activation (None, 32, 32, 256)  0           conv3_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_1_conv (Conv2D)    (None, 32, 32, 128)  32768       conv3_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_1_relu (Activation (None, 32, 32, 128)  0           conv3_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_concat (Concatenat (None, 32, 32, 288)  0           conv3_block4_concat[0][0]        
                                                                 conv3_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_0_bn (BatchNormali (None, 32, 32, 288)  1152        conv3_block5_concat[0][0]        
__________________________________________________________________________________________________
conv3_block6_0_relu (Activation (None, 32, 32, 288)  0           conv3_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_1_conv (Conv2D)    (None, 32, 32, 128)  36864       conv3_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_1_relu (Activation (None, 32, 32, 128)  0           conv3_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_concat (Concatenat (None, 32, 32, 320)  0           conv3_block5_concat[0][0]        
                                                                 conv3_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_0_bn (BatchNormali (None, 32, 32, 320)  1280        conv3_block6_concat[0][0]        
__________________________________________________________________________________________________
conv3_block7_0_relu (Activation (None, 32, 32, 320)  0           conv3_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_1_conv (Conv2D)    (None, 32, 32, 128)  40960       conv3_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_1_relu (Activation (None, 32, 32, 128)  0           conv3_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_concat (Concatenat (None, 32, 32, 352)  0           conv3_block6_concat[0][0]        
                                                                 conv3_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_0_bn (BatchNormali (None, 32, 32, 352)  1408        conv3_block7_concat[0][0]        
__________________________________________________________________________________________________
conv3_block8_0_relu (Activation (None, 32, 32, 352)  0           conv3_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block8_1_conv (Conv2D)    (None, 32, 32, 128)  45056       conv3_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_1_relu (Activation (None, 32, 32, 128)  0           conv3_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block8_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_concat (Concatenat (None, 32, 32, 384)  0           conv3_block7_concat[0][0]        
                                                                 conv3_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block9_0_bn (BatchNormali (None, 32, 32, 384)  1536        conv3_block8_concat[0][0]        
__________________________________________________________________________________________________
conv3_block9_0_relu (Activation (None, 32, 32, 384)  0           conv3_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block9_1_conv (Conv2D)    (None, 32, 32, 128)  49152       conv3_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block9_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block9_1_relu (Activation (None, 32, 32, 128)  0           conv3_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block9_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block9_concat (Concatenat (None, 32, 32, 416)  0           conv3_block8_concat[0][0]        
                                                                 conv3_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block10_0_bn (BatchNormal (None, 32, 32, 416)  1664        conv3_block9_concat[0][0]        
__________________________________________________________________________________________________
conv3_block10_0_relu (Activatio (None, 32, 32, 416)  0           conv3_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block10_1_conv (Conv2D)   (None, 32, 32, 128)  53248       conv3_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block10_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block10_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block10_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block10_concat (Concatena (None, 32, 32, 448)  0           conv3_block9_concat[0][0]        
                                                                 conv3_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv3_block11_0_bn (BatchNormal (None, 32, 32, 448)  1792        conv3_block10_concat[0][0]       
__________________________________________________________________________________________________
conv3_block11_0_relu (Activatio (None, 32, 32, 448)  0           conv3_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block11_1_conv (Conv2D)   (None, 32, 32, 128)  57344       conv3_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block11_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block11_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block11_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block11_concat (Concatena (None, 32, 32, 480)  0           conv3_block10_concat[0][0]       
                                                                 conv3_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv3_block12_0_bn (BatchNormal (None, 32, 32, 480)  1920        conv3_block11_concat[0][0]       
__________________________________________________________________________________________________
conv3_block12_0_relu (Activatio (None, 32, 32, 480)  0           conv3_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block12_1_conv (Conv2D)   (None, 32, 32, 128)  61440       conv3_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block12_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block12_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block12_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block12_concat (Concatena (None, 32, 32, 512)  0           conv3_block11_concat[0][0]       
                                                                 conv3_block12_2_conv[0][0]       
__________________________________________________________________________________________________
pool3_bn (BatchNormalization)   (None, 32, 32, 512)  2048        conv3_block12_concat[0][0]       
__________________________________________________________________________________________________
pool3_relu (Activation)         (None, 32, 32, 512)  0           pool3_bn[0][0]                   
__________________________________________________________________________________________________
pool3_conv (Conv2D)             (None, 32, 32, 256)  131072      pool3_relu[0][0]                 
__________________________________________________________________________________________________
pool3_pool (AveragePooling2D)   (None, 16, 16, 256)  0           pool3_conv[0][0]                 
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, 16, 16, 256)  1024        pool3_pool[0][0]                 
__________________________________________________________________________________________________
conv4_block1_0_relu (Activation (None, 16, 16, 256)  0           conv4_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D)    (None, 16, 16, 128)  32768       conv4_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 16, 16, 128)  0           conv4_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_concat (Concatenat (None, 16, 16, 288)  0           pool3_pool[0][0]                 
                                                                 conv4_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_0_bn (BatchNormali (None, 16, 16, 288)  1152        conv4_block1_concat[0][0]        
__________________________________________________________________________________________________
conv4_block2_0_relu (Activation (None, 16, 16, 288)  0           conv4_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D)    (None, 16, 16, 128)  36864       conv4_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 16, 16, 128)  0           conv4_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_concat (Concatenat (None, 16, 16, 320)  0           conv4_block1_concat[0][0]        
                                                                 conv4_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_0_bn (BatchNormali (None, 16, 16, 320)  1280        conv4_block2_concat[0][0]        
__________________________________________________________________________________________________
conv4_block3_0_relu (Activation (None, 16, 16, 320)  0           conv4_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D)    (None, 16, 16, 128)  40960       conv4_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 16, 16, 128)  0           conv4_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_concat (Concatenat (None, 16, 16, 352)  0           conv4_block2_concat[0][0]        
                                                                 conv4_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_0_bn (BatchNormali (None, 16, 16, 352)  1408        conv4_block3_concat[0][0]        
__________________________________________________________________________________________________
conv4_block4_0_relu (Activation (None, 16, 16, 352)  0           conv4_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D)    (None, 16, 16, 128)  45056       conv4_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 16, 16, 128)  0           conv4_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_concat (Concatenat (None, 16, 16, 384)  0           conv4_block3_concat[0][0]        
                                                                 conv4_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_0_bn (BatchNormali (None, 16, 16, 384)  1536        conv4_block4_concat[0][0]        
__________________________________________________________________________________________________
conv4_block5_0_relu (Activation (None, 16, 16, 384)  0           conv4_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D)    (None, 16, 16, 128)  49152       conv4_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 16, 16, 128)  0           conv4_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_concat (Concatenat (None, 16, 16, 416)  0           conv4_block4_concat[0][0]        
                                                                 conv4_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_0_bn (BatchNormali (None, 16, 16, 416)  1664        conv4_block5_concat[0][0]        
__________________________________________________________________________________________________
conv4_block6_0_relu (Activation (None, 16, 16, 416)  0           conv4_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D)    (None, 16, 16, 128)  53248       conv4_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 16, 16, 128)  0           conv4_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_concat (Concatenat (None, 16, 16, 448)  0           conv4_block5_concat[0][0]        
                                                                 conv4_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_0_bn (BatchNormali (None, 16, 16, 448)  1792        conv4_block6_concat[0][0]        
__________________________________________________________________________________________________
conv4_block7_0_relu (Activation (None, 16, 16, 448)  0           conv4_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_1_conv (Conv2D)    (None, 16, 16, 128)  57344       conv4_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_relu (Activation (None, 16, 16, 128)  0           conv4_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_concat (Concatenat (None, 16, 16, 480)  0           conv4_block6_concat[0][0]        
                                                                 conv4_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_0_bn (BatchNormali (None, 16, 16, 480)  1920        conv4_block7_concat[0][0]        
__________________________________________________________________________________________________
conv4_block8_0_relu (Activation (None, 16, 16, 480)  0           conv4_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_1_conv (Conv2D)    (None, 16, 16, 128)  61440       conv4_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_relu (Activation (None, 16, 16, 128)  0           conv4_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_concat (Concatenat (None, 16, 16, 512)  0           conv4_block7_concat[0][0]        
                                                                 conv4_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_0_bn (BatchNormali (None, 16, 16, 512)  2048        conv4_block8_concat[0][0]        
__________________________________________________________________________________________________
conv4_block9_0_relu (Activation (None, 16, 16, 512)  0           conv4_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_1_conv (Conv2D)    (None, 16, 16, 128)  65536       conv4_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_relu (Activation (None, 16, 16, 128)  0           conv4_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_concat (Concatenat (None, 16, 16, 544)  0           conv4_block8_concat[0][0]        
                                                                 conv4_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block10_0_bn (BatchNormal (None, 16, 16, 544)  2176        conv4_block9_concat[0][0]        
__________________________________________________________________________________________________
conv4_block10_0_relu (Activatio (None, 16, 16, 544)  0           conv4_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_1_conv (Conv2D)   (None, 16, 16, 128)  69632       conv4_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_concat (Concatena (None, 16, 16, 576)  0           conv4_block9_concat[0][0]        
                                                                 conv4_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_0_bn (BatchNormal (None, 16, 16, 576)  2304        conv4_block10_concat[0][0]       
__________________________________________________________________________________________________
conv4_block11_0_relu (Activatio (None, 16, 16, 576)  0           conv4_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_1_conv (Conv2D)   (None, 16, 16, 128)  73728       conv4_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_concat (Concatena (None, 16, 16, 608)  0           conv4_block10_concat[0][0]       
                                                                 conv4_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_0_bn (BatchNormal (None, 16, 16, 608)  2432        conv4_block11_concat[0][0]       
__________________________________________________________________________________________________
conv4_block12_0_relu (Activatio (None, 16, 16, 608)  0           conv4_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_1_conv (Conv2D)   (None, 16, 16, 128)  77824       conv4_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_concat (Concatena (None, 16, 16, 640)  0           conv4_block11_concat[0][0]       
                                                                 conv4_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_0_bn (BatchNormal (None, 16, 16, 640)  2560        conv4_block12_concat[0][0]       
__________________________________________________________________________________________________
conv4_block13_0_relu (Activatio (None, 16, 16, 640)  0           conv4_block13_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_1_conv (Conv2D)   (None, 16, 16, 128)  81920       conv4_block13_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_concat (Concatena (None, 16, 16, 672)  0           conv4_block12_concat[0][0]       
                                                                 conv4_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_0_bn (BatchNormal (None, 16, 16, 672)  2688        conv4_block13_concat[0][0]       
__________________________________________________________________________________________________
conv4_block14_0_relu (Activatio (None, 16, 16, 672)  0           conv4_block14_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_1_conv (Conv2D)   (None, 16, 16, 128)  86016       conv4_block14_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_concat (Concatena (None, 16, 16, 704)  0           conv4_block13_concat[0][0]       
                                                                 conv4_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_0_bn (BatchNormal (None, 16, 16, 704)  2816        conv4_block14_concat[0][0]       
__________________________________________________________________________________________________
conv4_block15_0_relu (Activatio (None, 16, 16, 704)  0           conv4_block15_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_1_conv (Conv2D)   (None, 16, 16, 128)  90112       conv4_block15_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_concat (Concatena (None, 16, 16, 736)  0           conv4_block14_concat[0][0]       
                                                                 conv4_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_0_bn (BatchNormal (None, 16, 16, 736)  2944        conv4_block15_concat[0][0]       
__________________________________________________________________________________________________
conv4_block16_0_relu (Activatio (None, 16, 16, 736)  0           conv4_block16_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_1_conv (Conv2D)   (None, 16, 16, 128)  94208       conv4_block16_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_concat (Concatena (None, 16, 16, 768)  0           conv4_block15_concat[0][0]       
                                                                 conv4_block16_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_0_bn (BatchNormal (None, 16, 16, 768)  3072        conv4_block16_concat[0][0]       
__________________________________________________________________________________________________
conv4_block17_0_relu (Activatio (None, 16, 16, 768)  0           conv4_block17_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_1_conv (Conv2D)   (None, 16, 16, 128)  98304       conv4_block17_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block17_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block17_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block17_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_concat (Concatena (None, 16, 16, 800)  0           conv4_block16_concat[0][0]       
                                                                 conv4_block17_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_0_bn (BatchNormal (None, 16, 16, 800)  3200        conv4_block17_concat[0][0]       
__________________________________________________________________________________________________
conv4_block18_0_relu (Activatio (None, 16, 16, 800)  0           conv4_block18_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_1_conv (Conv2D)   (None, 16, 16, 128)  102400      conv4_block18_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block18_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block18_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block18_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_concat (Concatena (None, 16, 16, 832)  0           conv4_block17_concat[0][0]       
                                                                 conv4_block18_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_0_bn (BatchNormal (None, 16, 16, 832)  3328        conv4_block18_concat[0][0]       
__________________________________________________________________________________________________
conv4_block19_0_relu (Activatio (None, 16, 16, 832)  0           conv4_block19_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_1_conv (Conv2D)   (None, 16, 16, 128)  106496      conv4_block19_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block19_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block19_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block19_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_concat (Concatena (None, 16, 16, 864)  0           conv4_block18_concat[0][0]       
                                                                 conv4_block19_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_0_bn (BatchNormal (None, 16, 16, 864)  3456        conv4_block19_concat[0][0]       
__________________________________________________________________________________________________
conv4_block20_0_relu (Activatio (None, 16, 16, 864)  0           conv4_block20_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_1_conv (Conv2D)   (None, 16, 16, 128)  110592      conv4_block20_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block20_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block20_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block20_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_concat (Concatena (None, 16, 16, 896)  0           conv4_block19_concat[0][0]       
                                                                 conv4_block20_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_0_bn (BatchNormal (None, 16, 16, 896)  3584        conv4_block20_concat[0][0]       
__________________________________________________________________________________________________
conv4_block21_0_relu (Activatio (None, 16, 16, 896)  0           conv4_block21_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_1_conv (Conv2D)   (None, 16, 16, 128)  114688      conv4_block21_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block21_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block21_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block21_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_concat (Concatena (None, 16, 16, 928)  0           conv4_block20_concat[0][0]       
                                                                 conv4_block21_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_0_bn (BatchNormal (None, 16, 16, 928)  3712        conv4_block21_concat[0][0]       
__________________________________________________________________________________________________
conv4_block22_0_relu (Activatio (None, 16, 16, 928)  0           conv4_block22_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_1_conv (Conv2D)   (None, 16, 16, 128)  118784      conv4_block22_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block22_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block22_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block22_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_concat (Concatena (None, 16, 16, 960)  0           conv4_block21_concat[0][0]       
                                                                 conv4_block22_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_0_bn (BatchNormal (None, 16, 16, 960)  3840        conv4_block22_concat[0][0]       
__________________________________________________________________________________________________
conv4_block23_0_relu (Activatio (None, 16, 16, 960)  0           conv4_block23_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_1_conv (Conv2D)   (None, 16, 16, 128)  122880      conv4_block23_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block23_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block23_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block23_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_concat (Concatena (None, 16, 16, 992)  0           conv4_block22_concat[0][0]       
                                                                 conv4_block23_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_0_bn (BatchNormal (None, 16, 16, 992)  3968        conv4_block23_concat[0][0]       
__________________________________________________________________________________________________
conv4_block24_0_relu (Activatio (None, 16, 16, 992)  0           conv4_block24_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_1_conv (Conv2D)   (None, 16, 16, 128)  126976      conv4_block24_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block24_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block24_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block24_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_concat (Concatena (None, 16, 16, 1024) 0           conv4_block23_concat[0][0]       
                                                                 conv4_block24_2_conv[0][0]       
__________________________________________________________________________________________________
pool4_bn (BatchNormalization)   (None, 16, 16, 1024) 4096        conv4_block24_concat[0][0]       
__________________________________________________________________________________________________
pool4_relu (Activation)         (None, 16, 16, 1024) 0           pool4_bn[0][0]                   
__________________________________________________________________________________________________
pool4_conv (Conv2D)             (None, 16, 16, 512)  524288      pool4_relu[0][0]                 
__________________________________________________________________________________________________
pool4_pool (AveragePooling2D)   (None, 8, 8, 512)    0           pool4_conv[0][0]                 
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, 8, 8, 512)    2048        pool4_pool[0][0]                 
__________________________________________________________________________________________________
conv5_block1_0_relu (Activation (None, 8, 8, 512)    0           conv5_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D)    (None, 8, 8, 128)    65536       conv5_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 8, 8, 128)    0           conv5_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_concat (Concatenat (None, 8, 8, 544)    0           pool4_pool[0][0]                 
                                                                 conv5_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_0_bn (BatchNormali (None, 8, 8, 544)    2176        conv5_block1_concat[0][0]        
__________________________________________________________________________________________________
conv5_block2_0_relu (Activation (None, 8, 8, 544)    0           conv5_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D)    (None, 8, 8, 128)    69632       conv5_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 8, 8, 128)    0           conv5_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_concat (Concatenat (None, 8, 8, 576)    0           conv5_block1_concat[0][0]        
                                                                 conv5_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_0_bn (BatchNormali (None, 8, 8, 576)    2304        conv5_block2_concat[0][0]        
__________________________________________________________________________________________________
conv5_block3_0_relu (Activation (None, 8, 8, 576)    0           conv5_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D)    (None, 8, 8, 128)    73728       conv5_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 8, 8, 128)    0           conv5_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_concat (Concatenat (None, 8, 8, 608)    0           conv5_block2_concat[0][0]        
                                                                 conv5_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block4_0_bn (BatchNormali (None, 8, 8, 608)    2432        conv5_block3_concat[0][0]        
__________________________________________________________________________________________________
conv5_block4_0_relu (Activation (None, 8, 8, 608)    0           conv5_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block4_1_conv (Conv2D)    (None, 8, 8, 128)    77824       conv5_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block4_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block4_1_relu (Activation (None, 8, 8, 128)    0           conv5_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block4_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block4_concat (Concatenat (None, 8, 8, 640)    0           conv5_block3_concat[0][0]        
                                                                 conv5_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block5_0_bn (BatchNormali (None, 8, 8, 640)    2560        conv5_block4_concat[0][0]        
__________________________________________________________________________________________________
conv5_block5_0_relu (Activation (None, 8, 8, 640)    0           conv5_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block5_1_conv (Conv2D)    (None, 8, 8, 128)    81920       conv5_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block5_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block5_1_relu (Activation (None, 8, 8, 128)    0           conv5_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block5_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block5_concat (Concatenat (None, 8, 8, 672)    0           conv5_block4_concat[0][0]        
                                                                 conv5_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block6_0_bn (BatchNormali (None, 8, 8, 672)    2688        conv5_block5_concat[0][0]        
__________________________________________________________________________________________________
conv5_block6_0_relu (Activation (None, 8, 8, 672)    0           conv5_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block6_1_conv (Conv2D)    (None, 8, 8, 128)    86016       conv5_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block6_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block6_1_relu (Activation (None, 8, 8, 128)    0           conv5_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block6_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block6_concat (Concatenat (None, 8, 8, 704)    0           conv5_block5_concat[0][0]        
                                                                 conv5_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block7_0_bn (BatchNormali (None, 8, 8, 704)    2816        conv5_block6_concat[0][0]        
__________________________________________________________________________________________________
conv5_block7_0_relu (Activation (None, 8, 8, 704)    0           conv5_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block7_1_conv (Conv2D)    (None, 8, 8, 128)    90112       conv5_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block7_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block7_1_relu (Activation (None, 8, 8, 128)    0           conv5_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block7_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block7_concat (Concatenat (None, 8, 8, 736)    0           conv5_block6_concat[0][0]        
                                                                 conv5_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block8_0_bn (BatchNormali (None, 8, 8, 736)    2944        conv5_block7_concat[0][0]        
__________________________________________________________________________________________________
conv5_block8_0_relu (Activation (None, 8, 8, 736)    0           conv5_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block8_1_conv (Conv2D)    (None, 8, 8, 128)    94208       conv5_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block8_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block8_1_relu (Activation (None, 8, 8, 128)    0           conv5_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block8_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block8_concat (Concatenat (None, 8, 8, 768)    0           conv5_block7_concat[0][0]        
                                                                 conv5_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block9_0_bn (BatchNormali (None, 8, 8, 768)    3072        conv5_block8_concat[0][0]        
__________________________________________________________________________________________________
conv5_block9_0_relu (Activation (None, 8, 8, 768)    0           conv5_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block9_1_conv (Conv2D)    (None, 8, 8, 128)    98304       conv5_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block9_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block9_1_relu (Activation (None, 8, 8, 128)    0           conv5_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block9_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block9_concat (Concatenat (None, 8, 8, 800)    0           conv5_block8_concat[0][0]        
                                                                 conv5_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block10_0_bn (BatchNormal (None, 8, 8, 800)    3200        conv5_block9_concat[0][0]        
__________________________________________________________________________________________________
conv5_block10_0_relu (Activatio (None, 8, 8, 800)    0           conv5_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block10_1_conv (Conv2D)   (None, 8, 8, 128)    102400      conv5_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block10_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block10_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block10_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block10_concat (Concatena (None, 8, 8, 832)    0           conv5_block9_concat[0][0]        
                                                                 conv5_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block11_0_bn (BatchNormal (None, 8, 8, 832)    3328        conv5_block10_concat[0][0]       
__________________________________________________________________________________________________
conv5_block11_0_relu (Activatio (None, 8, 8, 832)    0           conv5_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block11_1_conv (Conv2D)   (None, 8, 8, 128)    106496      conv5_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block11_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block11_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block11_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block11_concat (Concatena (None, 8, 8, 864)    0           conv5_block10_concat[0][0]       
                                                                 conv5_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block12_0_bn (BatchNormal (None, 8, 8, 864)    3456        conv5_block11_concat[0][0]       
__________________________________________________________________________________________________
conv5_block12_0_relu (Activatio (None, 8, 8, 864)    0           conv5_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block12_1_conv (Conv2D)   (None, 8, 8, 128)    110592      conv5_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block12_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block12_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block12_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block12_concat (Concatena (None, 8, 8, 896)    0           conv5_block11_concat[0][0]       
                                                                 conv5_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block13_0_bn (BatchNormal (None, 8, 8, 896)    3584        conv5_block12_concat[0][0]       
__________________________________________________________________________________________________
conv5_block13_0_relu (Activatio (None, 8, 8, 896)    0           conv5_block13_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block13_1_conv (Conv2D)   (None, 8, 8, 128)    114688      conv5_block13_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block13_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block13_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block13_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block13_concat (Concatena (None, 8, 8, 928)    0           conv5_block12_concat[0][0]       
                                                                 conv5_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block14_0_bn (BatchNormal (None, 8, 8, 928)    3712        conv5_block13_concat[0][0]       
__________________________________________________________________________________________________
conv5_block14_0_relu (Activatio (None, 8, 8, 928)    0           conv5_block14_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block14_1_conv (Conv2D)   (None, 8, 8, 128)    118784      conv5_block14_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block14_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block14_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block14_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block14_concat (Concatena (None, 8, 8, 960)    0           conv5_block13_concat[0][0]       
                                                                 conv5_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block15_0_bn (BatchNormal (None, 8, 8, 960)    3840        conv5_block14_concat[0][0]       
__________________________________________________________________________________________________
conv5_block15_0_relu (Activatio (None, 8, 8, 960)    0           conv5_block15_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block15_1_conv (Conv2D)   (None, 8, 8, 128)    122880      conv5_block15_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block15_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block15_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block15_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block15_concat (Concatena (None, 8, 8, 992)    0           conv5_block14_concat[0][0]       
                                                                 conv5_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block16_0_bn (BatchNormal (None, 8, 8, 992)    3968        conv5_block15_concat[0][0]       
__________________________________________________________________________________________________
conv5_block16_0_relu (Activatio (None, 8, 8, 992)    0           conv5_block16_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block16_1_conv (Conv2D)   (None, 8, 8, 128)    126976      conv5_block16_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block16_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block16_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block16_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block16_concat (Concatena (None, 8, 8, 1024)   0           conv5_block15_concat[0][0]       
                                                                 conv5_block16_2_conv[0][0]       
__________________________________________________________________________________________________
bn (BatchNormalization)         (None, 8, 8, 1024)   4096        conv5_block16_concat[0][0]       
__________________________________________________________________________________________________
relu (Activation)               (None, 8, 8, 1024)   0           bn[0][0]                         
__________________________________________________________________________________________________
up_sampling2d (UpSampling2D)    (None, 16, 16, 1024) 0           relu[0][0]                       
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 16, 16, 1536) 0           up_sampling2d[0][0]              
                                                                 pool4_conv[0][0]                 
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 16, 16, 256)  3538944     concatenate[0][0]                
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 16, 16, 256)  1024        conv2d[0][0]                     
__________________________________________________________________________________________________
activation (Activation)         (None, 16, 16, 256)  0           batch_normalization[0][0]        
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 16, 16, 256)  589824      activation[0][0]                 
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 16, 16, 256)  1024        conv2d_1[0][0]                   
__________________________________________________________________________________________________
activation_1 (Activation)       (None, 16, 16, 256)  0           batch_normalization_1[0][0]      
__________________________________________________________________________________________________
up_sampling2d_1 (UpSampling2D)  (None, 32, 32, 256)  0           activation_1[0][0]               
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 32, 32, 512)  0           up_sampling2d_1[0][0]            
                                                                 pool3_conv[0][0]                 
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 32, 32, 128)  589824      concatenate_1[0][0]              
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 32, 32, 128)  512         conv2d_2[0][0]                   
__________________________________________________________________________________________________
activation_2 (Activation)       (None, 32, 32, 128)  0           batch_normalization_2[0][0]      
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 32, 32, 128)  147456      activation_2[0][0]               
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 32, 32, 128)  512         conv2d_3[0][0]                   
__________________________________________________________________________________________________
activation_3 (Activation)       (None, 32, 32, 128)  0           batch_normalization_3[0][0]      
__________________________________________________________________________________________________
up_sampling2d_2 (UpSampling2D)  (None, 64, 64, 128)  0           activation_3[0][0]               
__________________________________________________________________________________________________
concatenate_2 (Concatenate)     (None, 64, 64, 256)  0           up_sampling2d_2[0][0]            
                                                                 pool2_conv[0][0]                 
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 64, 64, 64)   147456      concatenate_2[0][0]              
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 64, 64, 64)   256         conv2d_4[0][0]                   
__________________________________________________________________________________________________
activation_4 (Activation)       (None, 64, 64, 64)   0           batch_normalization_4[0][0]      
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 64, 64, 64)   36864       activation_4[0][0]               
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 64, 64, 64)   256         conv2d_5[0][0]                   
__________________________________________________________________________________________________
activation_5 (Activation)       (None, 64, 64, 64)   0           batch_normalization_5[0][0]      
__________________________________________________________________________________________________
up_sampling2d_3 (UpSampling2D)  (None, 128, 128, 64) 0           activation_5[0][0]               
__________________________________________________________________________________________________
concatenate_3 (Concatenate)     (None, 128, 128, 128 0           up_sampling2d_3[0][0]            
                                                                 conv1/relu[0][0]                 
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 128, 128, 32) 36864       concatenate_3[0][0]              
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 128, 128, 32) 128         conv2d_6[0][0]                   
__________________________________________________________________________________________________
activation_6 (Activation)       (None, 128, 128, 32) 0           batch_normalization_6[0][0]      
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 128, 128, 32) 9216        activation_6[0][0]               
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 128, 128, 32) 128         conv2d_7[0][0]                   
__________________________________________________________________________________________________
activation_7 (Activation)       (None, 128, 128, 32) 0           batch_normalization_7[0][0]      
__________________________________________________________________________________________________
up_sampling2d_4 (UpSampling2D)  (None, 256, 256, 32) 0           activation_7[0][0]               
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 256, 256, 16) 4608        up_sampling2d_4[0][0]            
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 256, 256, 16) 64          conv2d_8[0][0]                   
__________________________________________________________________________________________________
activation_8 (Activation)       (None, 256, 256, 16) 0           batch_normalization_8[0][0]      
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 256, 256, 16) 2304        activation_8[0][0]               
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 256, 256, 16) 64          conv2d_9[0][0]                   
__________________________________________________________________________________________________
activation_9 (Activation)       (None, 256, 256, 16) 0           batch_normalization_9[0][0]      
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 256, 256, 1)  145         activation_9[0][0]               
__________________________________________________________________________________________________
activation_10 (Activation)      (None, 256, 256, 1)  0           conv2d_10[0][0]                  
==================================================================================================
Total params: 12,144,977
Trainable params: 12,059,345
Non-trainable params: 85,632
__________________________________________________________________________________________________
In [24]:
from tensorflow.keras.utils import plot_model
plot_model(unet_densenet_model, 'unet_densenet_model.png', show_shapes=True)
dot: graph is too large for cairo-renderer bitmaps. Scaling by 0.633118 to fit

Out[24]:
In [25]:
BATCH_SIZE = 32
BUFFER_SIZE = 1500
SEED = 42
AUTOTUNE = tf.data.experimental.AUTOTUNE

train_ds = train_ds.shuffle(buffer_size=BUFFER_SIZE, seed=SEED)
train_ds = train_ds.cache()
train_ds = train_ds.batch(16)
train_ds = train_ds.prefetch(buffer_size=AUTOTUNE)
print(train_ds)
# Preparing the Validation Dataset
val_ds = val_ds.cache()
val_ds = val_ds.batch(16)
val_ds = val_ds.prefetch(buffer_size=AUTOTUNE)
print(val_ds)
<PrefetchDataset shapes: ((None, 256, 256, 3), (None, 256, 256, 1)), types: (tf.float32, tf.bool)>
<PrefetchDataset shapes: ((None, 256, 256, 3), (None, 256, 256, 1)), types: (tf.float32, tf.bool)>
In [30]:
# tensor-board in colab
# Refer: https://www.tensorflow.org/tensorboard/get_started
import os
import datetime

! rm -rf ./logs/ 
logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print(logdir)
logs/20210114-175459
In [31]:
import os
os.mkdir("model_save")
In [32]:
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import CSVLogger

filepath="model_save/"
checkpoints = ModelCheckpoint(filepath+'unet_with_densenet_weights-{epoch:02d}-{val_dice_coef:.4f}.hdf5', monitor='val_dice_coef', save_weights_only=True, verbose=1, save_best_only=True, mode='max')

train_log = CSVLogger(filepath+'unet_densenet01.log') #storing the training results in a pandas dataframe

tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)

callbacks_list = [checkpoints, train_log, tensorboard_callback]
In [33]:
%load_ext tensorboard
%tensorboard --logdir $logdir
The tensorboard extension is already loaded. To reload it, use:
  %reload_ext tensorboard
In [34]:
history2 = unet_densenet_model.fit(train_ds,epochs=35,batch_size=16,validation_data=val_ds,callbacks=callbacks_list)
Epoch 1/35
120/120 [==============================] - 152s 627ms/step - loss: 0.3563 - accuracy: 0.8844 - dice_coef: 0.0324 - val_loss: 0.2145 - val_accuracy: 0.9822 - val_dice_coef: 0.0484

Epoch 00001: val_dice_coef improved from -inf to 0.04837, saving model to model_save/unet_with_densenet_weights-01-0.0484.hdf5
Epoch 2/35
120/120 [==============================] - 45s 377ms/step - loss: 0.0523 - accuracy: 0.9867 - dice_coef: 0.1447 - val_loss: 0.0546 - val_accuracy: 0.9861 - val_dice_coef: 0.1947

Epoch 00002: val_dice_coef improved from 0.04837 to 0.19471, saving model to model_save/unet_with_densenet_weights-02-0.1947.hdf5
Epoch 3/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0383 - accuracy: 0.9881 - dice_coef: 0.2739 - val_loss: 0.0439 - val_accuracy: 0.9874 - val_dice_coef: 0.2510

Epoch 00003: val_dice_coef improved from 0.19471 to 0.25098, saving model to model_save/unet_with_densenet_weights-03-0.2510.hdf5
Epoch 4/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0321 - accuracy: 0.9894 - dice_coef: 0.3649 - val_loss: 0.0483 - val_accuracy: 0.9863 - val_dice_coef: 0.2549

Epoch 00004: val_dice_coef improved from 0.25098 to 0.25493, saving model to model_save/unet_with_densenet_weights-04-0.2549.hdf5
Epoch 5/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0274 - accuracy: 0.9905 - dice_coef: 0.4383 - val_loss: 0.0463 - val_accuracy: 0.9868 - val_dice_coef: 0.2780

Epoch 00005: val_dice_coef improved from 0.25493 to 0.27798, saving model to model_save/unet_with_densenet_weights-05-0.2780.hdf5
Epoch 6/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0247 - accuracy: 0.9911 - dice_coef: 0.4870 - val_loss: 0.0519 - val_accuracy: 0.9872 - val_dice_coef: 0.2517

Epoch 00006: val_dice_coef did not improve from 0.27798
Epoch 7/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0232 - accuracy: 0.9914 - dice_coef: 0.5144 - val_loss: 0.0647 - val_accuracy: 0.9842 - val_dice_coef: 0.2026

Epoch 00007: val_dice_coef did not improve from 0.27798
Epoch 8/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0217 - accuracy: 0.9918 - dice_coef: 0.5347 - val_loss: 0.0491 - val_accuracy: 0.9876 - val_dice_coef: 0.3789

Epoch 00008: val_dice_coef improved from 0.27798 to 0.37890, saving model to model_save/unet_with_densenet_weights-08-0.3789.hdf5
Epoch 9/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0202 - accuracy: 0.9922 - dice_coef: 0.5622 - val_loss: 0.0519 - val_accuracy: 0.9848 - val_dice_coef: 0.3567

Epoch 00009: val_dice_coef did not improve from 0.37890
Epoch 10/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0193 - accuracy: 0.9924 - dice_coef: 0.5785 - val_loss: 0.0513 - val_accuracy: 0.9852 - val_dice_coef: 0.3896

Epoch 00010: val_dice_coef improved from 0.37890 to 0.38956, saving model to model_save/unet_with_densenet_weights-10-0.3896.hdf5
Epoch 11/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0191 - accuracy: 0.9925 - dice_coef: 0.5875 - val_loss: 0.0511 - val_accuracy: 0.9853 - val_dice_coef: 0.3872

Epoch 00011: val_dice_coef did not improve from 0.38956
Epoch 12/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0181 - accuracy: 0.9929 - dice_coef: 0.6085 - val_loss: 0.0490 - val_accuracy: 0.9860 - val_dice_coef: 0.3956

Epoch 00012: val_dice_coef improved from 0.38956 to 0.39560, saving model to model_save/unet_with_densenet_weights-12-0.3956.hdf5
Epoch 13/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0164 - accuracy: 0.9936 - dice_coef: 0.6426 - val_loss: 0.0547 - val_accuracy: 0.9844 - val_dice_coef: 0.4111

Epoch 00013: val_dice_coef improved from 0.39560 to 0.41111, saving model to model_save/unet_with_densenet_weights-13-0.4111.hdf5
Epoch 14/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0149 - accuracy: 0.9942 - dice_coef: 0.6718 - val_loss: 0.0531 - val_accuracy: 0.9856 - val_dice_coef: 0.4252

Epoch 00014: val_dice_coef improved from 0.41111 to 0.42522, saving model to model_save/unet_with_densenet_weights-14-0.4252.hdf5
Epoch 15/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0136 - accuracy: 0.9945 - dice_coef: 0.6926 - val_loss: 0.0513 - val_accuracy: 0.9855 - val_dice_coef: 0.4144

Epoch 00015: val_dice_coef did not improve from 0.42522
Epoch 16/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0144 - accuracy: 0.9942 - dice_coef: 0.6796 - val_loss: 0.0538 - val_accuracy: 0.9868 - val_dice_coef: 0.4081

Epoch 00016: val_dice_coef did not improve from 0.42522
Epoch 17/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0130 - accuracy: 0.9948 - dice_coef: 0.7094 - val_loss: 0.0598 - val_accuracy: 0.9869 - val_dice_coef: 0.3850

Epoch 00017: val_dice_coef did not improve from 0.42522
Epoch 18/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0124 - accuracy: 0.9950 - dice_coef: 0.7242 - val_loss: 0.0629 - val_accuracy: 0.9862 - val_dice_coef: 0.3671

Epoch 00018: val_dice_coef did not improve from 0.42522
Epoch 19/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0118 - accuracy: 0.9953 - dice_coef: 0.7365 - val_loss: 0.0530 - val_accuracy: 0.9876 - val_dice_coef: 0.4327

Epoch 00019: val_dice_coef improved from 0.42522 to 0.43274, saving model to model_save/unet_with_densenet_weights-19-0.4327.hdf5
Epoch 20/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0118 - accuracy: 0.9952 - dice_coef: 0.7355 - val_loss: 0.0564 - val_accuracy: 0.9867 - val_dice_coef: 0.4372

Epoch 00020: val_dice_coef improved from 0.43274 to 0.43716, saving model to model_save/unet_with_densenet_weights-20-0.4372.hdf5
Epoch 21/35
120/120 [==============================] - 45s 374ms/step - loss: 0.0111 - accuracy: 0.9954 - dice_coef: 0.7458 - val_loss: 0.0570 - val_accuracy: 0.9855 - val_dice_coef: 0.4596

Epoch 00021: val_dice_coef improved from 0.43716 to 0.45955, saving model to model_save/unet_with_densenet_weights-21-0.4596.hdf5
Epoch 22/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0111 - accuracy: 0.9954 - dice_coef: 0.7467 - val_loss: 0.0591 - val_accuracy: 0.9865 - val_dice_coef: 0.4500

Epoch 00022: val_dice_coef did not improve from 0.45955
Epoch 23/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0105 - accuracy: 0.9956 - dice_coef: 0.7611 - val_loss: 0.0564 - val_accuracy: 0.9866 - val_dice_coef: 0.4608

Epoch 00023: val_dice_coef improved from 0.45955 to 0.46077, saving model to model_save/unet_with_densenet_weights-23-0.4608.hdf5
Epoch 24/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0101 - accuracy: 0.9958 - dice_coef: 0.7686 - val_loss: 0.0580 - val_accuracy: 0.9874 - val_dice_coef: 0.4345

Epoch 00024: val_dice_coef did not improve from 0.46077
Epoch 25/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0093 - accuracy: 0.9961 - dice_coef: 0.7857 - val_loss: 0.0583 - val_accuracy: 0.9869 - val_dice_coef: 0.4420

Epoch 00025: val_dice_coef did not improve from 0.46077
Epoch 26/35
120/120 [==============================] - 45s 376ms/step - loss: 0.0086 - accuracy: 0.9964 - dice_coef: 0.7992 - val_loss: 0.0635 - val_accuracy: 0.9863 - val_dice_coef: 0.4366

Epoch 00026: val_dice_coef did not improve from 0.46077
Epoch 27/35
120/120 [==============================] - 45s 376ms/step - loss: 0.0098 - accuracy: 0.9960 - dice_coef: 0.7816 - val_loss: 0.0636 - val_accuracy: 0.9880 - val_dice_coef: 0.4399

Epoch 00027: val_dice_coef did not improve from 0.46077
Epoch 28/35
120/120 [==============================] - 45s 376ms/step - loss: 0.0090 - accuracy: 0.9962 - dice_coef: 0.7925 - val_loss: 0.0622 - val_accuracy: 0.9878 - val_dice_coef: 0.4320

Epoch 00028: val_dice_coef did not improve from 0.46077
Epoch 29/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0086 - accuracy: 0.9964 - dice_coef: 0.8012 - val_loss: 0.0679 - val_accuracy: 0.9871 - val_dice_coef: 0.4012

Epoch 00029: val_dice_coef did not improve from 0.46077
Epoch 30/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0086 - accuracy: 0.9964 - dice_coef: 0.8018 - val_loss: 0.0626 - val_accuracy: 0.9880 - val_dice_coef: 0.4258

Epoch 00030: val_dice_coef did not improve from 0.46077
Epoch 31/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0084 - accuracy: 0.9965 - dice_coef: 0.8053 - val_loss: 0.0696 - val_accuracy: 0.9873 - val_dice_coef: 0.4016

Epoch 00031: val_dice_coef did not improve from 0.46077
Epoch 32/35
120/120 [==============================] - 45s 376ms/step - loss: 0.0087 - accuracy: 0.9963 - dice_coef: 0.8006 - val_loss: 0.0661 - val_accuracy: 0.9851 - val_dice_coef: 0.4130

Epoch 00032: val_dice_coef did not improve from 0.46077
Epoch 33/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0088 - accuracy: 0.9963 - dice_coef: 0.7978 - val_loss: 0.0809 - val_accuracy: 0.9843 - val_dice_coef: 0.4100

Epoch 00033: val_dice_coef did not improve from 0.46077
Epoch 34/35
120/120 [==============================] - 45s 375ms/step - loss: 0.0081 - accuracy: 0.9965 - dice_coef: 0.8116 - val_loss: 0.0739 - val_accuracy: 0.9852 - val_dice_coef: 0.4204

Epoch 00034: val_dice_coef did not improve from 0.46077
Epoch 35/35
120/120 [==============================] - 45s 376ms/step - loss: 0.0080 - accuracy: 0.9966 - dice_coef: 0.8149 - val_loss: 0.0729 - val_accuracy: 0.9865 - val_dice_coef: 0.4351

Epoch 00035: val_dice_coef did not improve from 0.46077
In [36]:
_ = unet_densenet_model.evaluate(train_ds)
_ = unet_densenet_model.evaluate(val_ds)
120/120 [==============================] - 12s 97ms/step - loss: 0.0182 - accuracy: 0.9938 - dice_coef: 0.7335
30/30 [==============================] - 3s 98ms/step - loss: 0.0729 - accuracy: 0.9865 - dice_coef: 0.4351
In [39]:
unet_densenet_model.save('/content/model_save/unet_with_densenet_weights-23-0.4608.hdf5')
In [40]:
unet_densenet_model.load_weights('/content/model_save/unet_with_densenet_weights-23-0.4608.hdf5')
for i,j in val_ds.take(20):
  a=unet_densenet_model.predict(i)
  preds_val_t = (a[0] > 0.5).astype(np.uint8)
  plt.figure(figsize=(20,6))
  plt.subplot(131)
  plt.title("original image")
  plt.imshow(np.squeeze(i[0]),cmap='gray')
  plt.subplot(132)
  plt.title("ground truth")
  plt.imshow(np.squeeze(j[0]),cmap='gray')
  plt.subplot(133)
  plt.title("predicted mask")
  plt.imshow(np.squeeze(preds_val_t).astype(np.uint8),cmap='gray')
  plt.show()
In [41]:
model.save('/content/drive/My Drive/siim-acr-pneumothorax/pneumothorax/unet_with_densenet_weights-23-0.4608.hdf5')
In [ ]: